use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId, Package};
-use util::{CargoResult, ChainError, Config, human};
+use util::{CargoResult, ChainError, Config, human, profile};
pub trait Registry {
fn query(&mut self, name: &Dependency) -> CargoResult<Vec<Summary>>;
let mut source = source_id.load(self.config);
// Ensure the source has fetched all necessary remote data.
+ let p = profile::start(format!("updating: {}", source_id));
try!(source.update());
+ drop(p);
if override {
self.overrides.push(source_id.clone());
try!(write!(f, "?ref={}", reference));
}
- if precise.is_some() {
- try!(write!(f, "#{}", precise.get_ref()));
+ match *precise {
+ Some(ref s) => {
+ try!(write!(f, "#{}", s.as_slice().slice_to(8)));
+ }
+ None => {}
}
},
SourceId { kind: RegistryKind, .. } => {
extern crate semver;
extern crate serialize;
extern crate term;
+extern crate time;
extern crate url;
#[phase(plugin)] extern crate regex_macros;
#[phase(plugin, link)] extern crate log;
use sources::{PathSource};
use util::config::{Config, ConfigValue};
use util::{CargoResult, Wrap, config, internal, human, ChainError, toml};
+use util::profile;
pub struct CompileOptions<'a> {
pub update: bool,
manifest_path.dir_path()));
let (packages, resolve, resolve_with_overrides, sources) = {
+ let _p = profile::start("resolving...");
let lockfile = manifest_path.dir_path().join("Cargo.lock");
let source_id = package.get_package_id().get_source_id();
}).collect::<Vec<&Target>>();
{
+ let _p = profile::start("compiling");
let mut config = try!(Config::new(*shell, update, jobs, target));
try!(scrape_target_config(&mut config, &user_configs));
use core::{SourceMap, Package, PackageId, PackageSet, Resolve, Target};
use util;
-use util::{CargoResult, ChainError, internal, Config};
+use util::{CargoResult, ChainError, internal, Config, profile};
use super::layout::{Layout, LayoutProxy};
/// Prepare this context, ensuring that all filesystem directories are in
/// place.
pub fn prepare(&mut self, pkg: &'a Package) -> CargoResult<()> {
+ let _p = profile::start("preparing layout");
+
try!(self.host.prepare().chain_error(|| {
internal(format!("couldn't prepare build directories for `{}`",
pkg.get_name()))
use core::{Package, Target};
use util;
use util::hex::short_hash;
-use util::{CargoResult, Fresh, Dirty, Freshness, internal, Require};
+use util::{CargoResult, Fresh, Dirty, Freshness, internal, Require, profile};
use super::job::Job;
use super::context::Context;
pub fn prepare<'a, 'b>(cx: &mut Context<'a, 'b>, pkg: &'a Package,
targets: &[&'a Target])
-> CargoResult<(Freshness, Job, Job)> {
+ let _p = profile::start(format!("fingerprint: {}", pkg));
let filename = format!(".{}.{}.fingerprint", pkg.get_name(),
short_hash(pkg.get_package_id()));
let filename = filename.as_slice();
use core::{Package, PackageId, Resolve};
use util::{Config, TaskPool, DependencyQueue, Fresh, Dirty, Freshness};
-use util::CargoResult;
+use util::{CargoResult, profile};
use super::job::Job;
/// necessary dependencies, in order. Freshness is propagated as far as
/// possible along each dependency chain.
pub fn execute(&mut self) -> CargoResult<()> {
+ let _p = profile::start("executing the job graph");
+
// Iteratively execute the dependency graph. Each turn of this loop will
// schedule as much work as possible and then wait for one job to finish,
// possibly scheduling more work afterwards.
use core::{SourceMap, Package, PackageId, PackageSet, Target, Resolve};
use util;
use util::{CargoResult, ProcessBuilder, CargoError, human, caused_human};
-use util::{Config, Freshness, internal, ChainError};
+use util::{Config, Freshness, internal, ChainError, profile};
use self::job::Job;
use self::job_queue::JobQueue;
jobs: &mut Vec<(&'a Package, Freshness, (Job, Job))>)
-> CargoResult<()> {
debug!("compile_pkg; pkg={}; targets={}", pkg, targets);
+ let _p = profile::start(format!("preparing: {}", pkg));
if targets.is_empty() {
return Ok(())
pub mod paths;
pub mod errors;
pub mod hex;
+pub mod profile;
mod pool;
mod dependency_queue;
mod to_url;
--- /dev/null
+use std::os;
+use std::mem;
+use std::fmt::Show;
+use time;
+
+// Task-local (thread-local) profiler state:
+//  - `profile_stack`: start timestamps (ns, from time::precise_time_ns)
+//    of the currently nested, still-open profile scopes.
+//  - `messages`: finished timings, accumulated until the outermost scope
+//    closes and the whole tree is printed.
+local_data_key!(profile_stack: Vec<u64>)
+local_data_key!(messages: Vec<Message>)
+
+// (nesting depth at completion, elapsed nanoseconds, description)
+type Message = (uint, u64, String);
+
+/// Guard returned by `start()`; its `Drop` impl records the elapsed time
+/// for the scope it was created in.
+pub struct Profiler {
+    // Human-readable label for this scope; emptied (mem::replace) on drop.
+    desc: String,
+}
+
+fn enabled() -> bool { os::getenv("CARGO_PROFILE").is_some() }
+
+/// Begin timing a profile scope labelled by `desc`.
+///
+/// Returns a `Profiler` guard; the elapsed time is recorded (and possibly
+/// printed) when the guard is dropped. When profiling is disabled this is
+/// a no-op and the guard carries an empty description.
+pub fn start<T: Show>(desc: T) -> Profiler {
+    if !enabled() { return Profiler { desc: String::new() } }
+
+    // Push this scope's start time onto the task-local stack. `replace`
+    // takes the vector out of TLS, so it must be put back afterwards.
+    // NOTE(review): assumes enabled() yields the same answer here and in
+    // Drop; if CARGO_PROFILE appears mid-scope, Drop pops from an empty
+    // stack and panics — confirm this is acceptable.
+    let mut stack = profile_stack.replace(None).unwrap_or(Vec::new());
+    stack.push(time::precise_time_ns());
+    profile_stack.replace(Some(stack));
+
+    Profiler {
+        desc: desc.to_string(),
+    }
+}
+
+impl Drop for Profiler {
+    // Close the scope: pop the matching start time, record the elapsed
+    // duration, and — if this was the outermost scope — print the whole
+    // timing tree accumulated in `messages`.
+    fn drop(&mut self) {
+        if !enabled() { return }
+
+        // Take both TLS vectors out for the duration of this drop; they
+        // are put back at the end.
+        let mut stack = profile_stack.replace(None).unwrap_or(Vec::new());
+        let mut msgs = messages.replace(None).unwrap_or(Vec::new());
+
+        // Matching timestamp pushed by `start`.
+        // NOTE(review): unwrap panics if no timestamp was pushed (e.g.
+        // CARGO_PROFILE set after this scope started) — confirm intended.
+        let start = stack.pop().unwrap();
+        let end = time::precise_time_ns();
+
+        // Take the label out of self without cloning.
+        let msg = mem::replace(&mut self.desc, String::new());
+        if stack.len() == 0 {
+            // Outermost scope finished: dump the tree. Children drop
+            // before their parent, so deeper-level entries precede their
+            // parent in `msgs`; `print` walks entries at level `lvl` and
+            // recurses one level deeper into the slice of entries seen
+            // since the previous level-`lvl` entry.
+            fn print(lvl: uint, msgs: &[Message]) {
+                let mut last = 0;
+                for (i, &(l, time, ref msg)) in msgs.iter().enumerate() {
+                    if l != lvl { continue }
+                    // NOTE(review): one space of indent per nesting level;
+                    // possibly meant to be a wider indent — cosmetic only.
+                    println!("{} {:6}ms - {}", " ".repeat(lvl + 1),
+                             time / 1000000, msg);
+
+                    print(lvl + 1, msgs.slice(last, i));
+                    last = i;
+                }
+
+            }
+            msgs.push((0, end - start, msg));
+            print(0, msgs.as_slice());
+        } else {
+            // Still inside an enclosing scope: just record at this depth.
+            msgs.push((stack.len(), end - start, msg));
+        }
+        // NOTE(review): `msgs` is not cleared after a top-level dump, so a
+        // later top-level scope in the same task would re-print these
+        // entries — confirm whether it should be emptied here.
+        profile_stack.replace(Some(stack));
+        messages.replace(Some(msgs));
+
+    }
+}